Code
library(magrittr)

First, simulate genotype data. Then, for each subject’s genotype, simulate replicate traits.

# NOTE(review): 2023-04-15 is arithmetic, not a date — it evaluates to the
# integer 2004 (2023 - 4 - 15). Harmless as a seed, but confirm it is intended.
set.seed(2023-04-15)
# --- Simulation setup -------------------------------------------------------
# training and testing sample sizes
n_tr <- 10000
n_te <- 100
n <- n_tr + n_te
# n is total number of subjects
n_reps <- 50
# n_reps is number of replicates
n_snp <- 1
# n_snp is number of SNPs
# simulate genotypes matrix
# Genotypes coded 0/1/2 (allele counts); the 0.25/0.5/0.25 probabilities
# correspond to Hardy-Weinberg proportions at allele frequency 0.5.
geno <- matrix(sample(c(0,1,2), n*n_snp, replace=TRUE, prob=c(0.25,0.5,0.25)), nrow=n, ncol=n_snp)
# simulate phenotype
beta <- 1
# y is an n x n_reps matrix: every column is one replicate trait sharing the
# same genetic signal (geno %*% beta) plus independent standard-normal noise.
y <- as.numeric(geno %*% beta) %*% t(rep(1, n_reps)) +
matrix(data = rnorm(n * n_reps), nrow = n, ncol = n_reps)
# prepare for splitting into training and testing
# get test subject ids
test_ids <- sample(1:n, n_te, replace = FALSE)
# organize data
# Assemble one tibble per subject: pheno1..pheno<n_reps> trait columns, the
# genotype, a subject id, and (on the next line) a test-set membership flag.
dat <- tibble::as_tibble(y) %>%
# rename the default V1..Vk matrix columns to pheno1..phenok
dplyr::rename_with(function(x){ num <- stringr::str_extract(x, "[0-9]+")
return(paste0("pheno", num))}
) %>%
dplyr::bind_cols(geno %>% tibble::as_tibble() %>% dplyr::rename(geno = 1)) %>%
dplyr::mutate(id = 1:n) %>% # fix this when using more than one SNP
dplyr::mutate(in_test_set = id %in% test_ids)

# Console output:
# Warning: The `x` argument of `as_tibble.matrix()` must have unique column names if
# `.name_repair` is omitted as of tibble 2.0.0.
# ℹ Using compatibility `.name_repair`.
# Split subjects into training and testing sets using the membership flag.
training <- dplyr::filter(dat, !in_test_set)
testing <- dplyr::filter(dat, in_test_set)
# Add an NA fold column so test rows remain identifiable after they are
# bound onto fold-labelled training rows later on.
testing2 <- dplyr::mutate(testing, fold = NA_integer_)
# use all training with leave K out
alpha <- 0.1

Above, we specified the number of replicates for the simulations. We created 50 replicate traits for the same $1.01 \times 10^{4}$ subjects. Note that each subject has only 1 SNP(s).
import numpy as np
import pandas as pd
def leastsq_minL2(X, Y, X1, tol=1e-8):
    """Min-norm least-squares fit on (X, Y), evaluated at the points X1.

    Uses a rank-truncated SVD pseudoinverse: singular values below
    ``tol * (largest singular value)`` are treated as zero, so betahat is
    the minimum-L2-norm least-squares solution.

    Parameters
    ----------
    X : (n, d) array — training design matrix.
    Y : (n,) array — training responses.
    X1 : (n1, d) array — points at which to predict.
    tol : float — relative cutoff defining the numerical rank.

    Returns
    -------
    (n1,) array of predictions X1 @ betahat.
    """
    # full_matrices=False: the original used the full SVD, whose U factor is
    # (n, n) — O(n^2) memory for tall X (n up to 1e5 in this document) even
    # though only the first rX <= d columns are ever used. The thin SVD is
    # mathematically equivalent here and far cheaper.
    uX, dX, vX = np.linalg.svd(X, full_matrices=False)
    rX = (dX >= dX[0] * tol).sum()  # numerical rank of X
    betahat = (vX[:rX].T / dX[:rX]).dot(uX[:, :rX].T.dot(Y))
    return X1.dot(betahat)
def compute_PIs(X, Y, X1, alpha=0.1, K=10, fit_muh_fun=leastsq_minL2):
    """K-fold CV+ (cross-conformal) prediction intervals for the rows of X1.

    For each of K contiguous folds, the model is fit on the other folds,
    held-out residuals are recorded, and predictions are made at X1. The
    interval for each test point combines the fold-out predictions with the
    held-out absolute residuals (Barber et al., "Predictive inference with
    the jackknife+", CV+ variant).

    Parameters
    ----------
    X : (n, d) array — training design matrix.
    Y : (n,) array — training responses.
    X1 : (n1, d) array — test points.
    alpha : float — target miscoverage level.
    K : int — number of folds. Only the first K * floor(n/K) training rows
        are used; any remainder when K does not divide n is silently ignored.
    fit_muh_fun : callable(X_train, Y_train, X_eval) -> predictions.

    Returns
    -------
    pandas DataFrame with hierarchical columns ('CV+', 'lower'/'upper'),
    one row per row of X1.

    Notes
    -----
    Requires ceil((1 - alpha) * (n + 1)) <= n (i.e. n not too small for the
    requested alpha), otherwise the quantile indexing below is out of range.
    """
    n = len(Y)
    n1 = len(X1)
    ###############################
    # CV+
    ###############################
    n_K = np.floor(n / K).astype(int)  # fold size
    base_inds_to_delete = np.arange(n_K).astype(int)
    resids_LKO = np.zeros(n)                      # held-out absolute residuals
    muh_LKO_vals_testpoint = np.zeros((n, n1))    # row i: fold-out preds at X1
    for i in range(K):
        inds_to_delete = (base_inds_to_delete + n_K * i).astype(int)
        # Fit on all folds except fold i. The original used
        # np.delete(X, inds) WITHOUT axis, which flattens X and therefore
        # only worked for d == 1 (after ad-hoc re-reshaping); deleting rows
        # with axis=0 is equivalent for d == 1 and correct for any d.
        X0 = np.delete(X, inds_to_delete, 0)
        Y0 = np.delete(Y, inds_to_delete)
        # Predict at the held-out fold and at all test points in one call.
        muh_vals_LKO = fit_muh_fun(X0, Y0, np.r_[X[inds_to_delete], X1])
        resids_LKO[inds_to_delete] = np.abs(Y[inds_to_delete] - muh_vals_LKO[:n_K])
        # Every training point in fold i shares the same test-point
        # predictions (broadcast of the (n1,) row across the fold's rows).
        muh_LKO_vals_testpoint[inds_to_delete] = muh_vals_LKO[n_K:]
    # Conformal quantile index at level 1 - alpha.
    ind_Kq = (np.ceil((1 - alpha) * (n + 1))).astype(int)
    PIs_dict = {'CV+': pd.DataFrame(
        np.c_[np.sort(muh_LKO_vals_testpoint.T - resids_LKO, axis=1).T[-ind_Kq],
              np.sort(muh_LKO_vals_testpoint.T + resids_LKO, axis=1).T[ind_Kq - 1]],
        columns=['lower', 'upper'])}
    return pd.concat(PIs_dict.values(), axis=1, keys=PIs_dict.keys())
# simulation
# Simulation study: coverage and width of CV+ intervals across training-set
# sizes. NOTE: the extraction destroyed this script's indentation; the loop
# structure below is restored (inner-loop body is everything that uses
# `itrial`), with the random-call order left exactly as in the original.
n_vals = np.array([100, 1000, 10000, 100000])  # training set size
n1 = 100  # test set size
SNR = 10  # signal-to-noise ratio used to scale beta
ntrial = 50  # replicate simulations per training-set size
alpha = 0.1  # target miscoverage
#dim_vals = np.arange(5,205,5)
d = 1  # number of predictors
beta = np.random.normal(size=d)
beta = beta / np.sqrt((beta ** 2).sum()) * np.sqrt(SNR)  # ||beta||^2 = SNR
X1 = np.random.normal(size=(n1, d))
Y1 = X1.dot(beta) + np.random.normal(size=n1)
method = 'CV+'
# define new objects for storing Y and X (keyed by n, for later use in R)
Y_dic = dict.fromkeys(n_vals, 0)
X_dic = dict.fromkeys(n_vals, 0)
# loop over n_vals
for n in n_vals:
    # NOTE(review): `results` is re-created for every n and never stored per
    # n, so only the last n's rows survive the loop — confirm this is
    # intended (only PIs/X/Y appear to be saved downstream).
    results = pd.DataFrame(columns=['itrial', 'd', 'method', 'coverage', 'width'])
    Xa = np.zeros((n, ntrial))
    Ya = np.zeros((n, ntrial))
    for itrial in range(ntrial):
        X = np.random.normal(size=(n, d))
        Xa[:, itrial] = X[:, 0]
        Y = X.dot(beta) + np.random.normal(size=n)
        Ya[:, itrial] = Y
        # store X, Y for later use with R code
        # store as numpy arrays
        PIs = compute_PIs(X=X, Y=Y, X1=X1)
        coverage = ((PIs[method]['lower'] <= Y1) & (PIs[method]['upper'] >= Y1)).mean()
        width = (PIs[method]['upper'] - PIs[method]['lower']).mean()
        results.loc[len(results)] = [itrial, d, method, coverage, width]
    X_dic[n] = Xa
    Y_dic[n] = Ya

saveRDS(py$PIs, file = "PIs.rds")

res_list <- list()
# --- Leave-one-fold-out (CV+-style) intervals on the genotype simulation ----
# For each number of folds, and for two training-set sizes per fold count,
# fit per-fold linear regressions for every replicate trait and build
# conformal prediction intervals for each test subject.
vars_list_list_list <- list()
n_folds_vec <- c(5, 10, 20, 50)
for (n_folds in n_folds_vec){
# partition training data into K folds
folds <- split(training$id, sample(rep(1:n_folds, length.out = n_tr)))
# label every training row with its fold, then sort by fold
training2_pre <- training %>%
dplyr::mutate(fold = id %>% purrr::map_int(~which(sapply(folds, function(x) . %in% x)))) %>%
dplyr::arrange(fold)
tictoc::tic() # timing
tl <- list()
# two training-set sizes: full (n_tr) and a 10x-smaller subsample
n_per_fold_vec <- c(n_tr / n_folds, n_tr / (n_folds * 10))
vars_list_list <- list()
for (n_per_fold in n_per_fold_vec){
# subsample n_per_fold subjects from every fold
training2 <- training2_pre %>%
dplyr::group_by(fold) %>%
dplyr::slice_sample(n = n_per_fold) %>%
dplyr::ungroup()
# store each trait's outputs
out <- list()
vars_list <- list()
# loop over traits
for (trait_num in 1:n_reps){
# pull out the single trait column and rename it "pheno"
tr2_one_trait <- training2 %>%
dplyr::select(id, fold, geno, tidyselect::ends_with(paste0("pheno", trait_num))) %>%
dplyr::rename(pheno = tidyselect::ends_with(paste0("pheno", trait_num)))
te2_one_trait <- testing2 %>%
dplyr::select(id, fold, geno, tidyselect::ends_with(paste0("pheno", trait_num))) %>%
dplyr::rename(pheno = tidyselect::ends_with(paste0("pheno", trait_num)))
# regress leaving one fold out
preds <- list()
vars <- list()
for (fold_num in 1:n_folds) {
# get training data
train <- tr2_one_trait %>% dplyr::filter(fold != fold_num)
# get testing data
test <- tr2_one_trait %>% dplyr::filter(fold == fold_num)
# fit model
fit <- lm(pheno ~ 1 + geno, data = train)
# predict
# held-out fold rows plus all test-set rows; test rows have fold == NA,
# which is used below to separate the two again
foo <- test %>% dplyr::bind_rows(te2_one_trait)
foo$pred <- predict(fit, newdata = foo)
foo$fold_left_out <- fold_num
# attach the fitted coefficients and their standard errors
result <- foo %>%
dplyr::mutate(beta1_hat = coef(fit)[2],
beta0_hat = coef(fit)[1],
se_beta1_hat = summary(fit)$coefficients[2, 2],
se_beta0_hat = summary(fit)$coefficients[1, 2]
)
# save predictions
preds[[fold_num]] <- result
# leverage-style variance term x1' (X'X)^{-1} x1 for each test subject
te_geno_mat <- cbind(1, te2_one_trait$geno)
tr_geno_mat <- cbind(1, train$geno)
vars[[fold_num]] <- diag(te_geno_mat %*% solve(t(tr_geno_mat) %*% tr_geno_mat) %*% t(te_geno_mat))
}
vars_list[[trait_num]] <- vars
# assemble predicted values
# get absolute residuals
preds_training <- preds %>%
dplyr::bind_rows() %>%
dplyr::filter(!is.na(fold)) %>% # keep only training data
dplyr::mutate(absolute_residual = abs(pheno - pred)) %>%
dplyr::select( - fold_left_out)
preds_test <- preds %>%
dplyr::bind_rows() %>%
dplyr::filter(is.na(fold))
# get indexes
# conformal quantile ranks at levels (1 - alpha) and alpha
plus_index <- ceiling((1 - alpha) * (nrow(preds_training) + 1))
minus_index <- floor(alpha * (nrow(preds_training) + 1))
# go one by one through test set (testing2)
test_list <- list()
for (i in 1:nrow(testing2)){
tt <- testing2[i, ]
pt2 <- preds_test %>%
dplyr::filter(id == tt$id) %>% # our only use of tt
dplyr::rename_with(function(x)paste0("test_", x))
# pt2 contains the five predicted values for a single test subject
# NOTE(review): "five" holds only when n_folds == 5; pt2 has n_folds rows.
nrow(pt2) # 5
# pair every training residual with the prediction from the model that
# left out that training row's fold (CV+ construction)
preds_all <- preds_training %>%
dplyr::left_join(pt2, by = c("fold" = "test_fold_left_out")) %>%
dplyr::mutate(test_fitted_plus_absolute_residual = test_pred + absolute_residual,
test_fitted_minus_absolute_residual = test_pred - absolute_residual)
uu <- sort(preds_all$test_fitted_plus_absolute_residual)[plus_index]
ll <- sort(preds_all$test_fitted_minus_absolute_residual)[minus_index]
# make a tibble with exactly one row
test_list[[i]] <- preds_all %>%
dplyr::select(test_id, test_geno, test_pheno, test_beta1_hat, fold) %>%
dplyr::mutate(lower = ll, upper = uu) %>%
dplyr::distinct() %>%
tidyr::pivot_wider(names_from = fold,
values_from = test_beta1_hat,
names_prefix = "beta1_hat_fold_"
)
}
# combine per-subject rows; record coverage indicator and interval width
test_tib <- test_list %>%
dplyr::bind_rows() %>%
dplyr::mutate(in_interval = test_pheno >= lower & test_pheno <= upper) %>%
dplyr::mutate(interval_width = upper - lower) %>%
dplyr::mutate(training_set_size = n_per_fold * n_folds,
trait_num = trait_num)
out[[trait_num]] <- test_tib
}
tl[[as.character(n_per_fold * n_folds)]] <- out
vars_list_list[[as.character(n_per_fold * n_folds)]] <- vars_list
}
# NOTE(review): tl/vars_list_list are reset each n_folds iteration, so after
# the loop they hold only the n_folds == 50 results (res_list keeps all).
res_list[[as.character(n_folds)]] <- tl
vars_list_list_list[[as.character(n_folds)]] <- vars_list_list
tictoc::toc() # timing
}

# Console output:
# 189.042 sec elapsed
# 193.453 sec elapsed
# 200.736 sec elapsed
# 224.502 sec elapsed
saveRDS(res_list, "res_list.rds")
saveRDS(vars_list_list_list, "vars_list_list_list.rds")

#test_tib_thin <- test_tib %>%
# dplyr::select(test_id, test_geno)
# Flatten the per-trait tibbles into one table.
# NOTE(review): `tl` holds only the LAST n_folds (= 50) iteration's results
# at this point — confirm that summarising only n_folds == 50 is intended.
tt_intermediate <- tl %>%
dplyr::bind_rows(.id = "id")
# Per (training_set_size, trait) summaries of interval width, coverage, and
# the per-fold slope estimates.
results <- tt_intermediate %>%
dplyr::group_by(training_set_size, trait_num) %>%
dplyr::summarise(mean_interval_width = mean(interval_width),
sd_interval_width = sd(interval_width),
mean_in_interval = mean(in_interval),
sd_in_interval = sd(in_interval),
# only the first five folds' slope estimates are summarised here,
# even though n_folds may exceed 5
beta1_hat_fold_1 = mean(beta1_hat_fold_1),
beta1_hat_fold_2 = mean(beta1_hat_fold_2),
beta1_hat_fold_3 = mean(beta1_hat_fold_3),
beta1_hat_fold_4 = mean(beta1_hat_fold_4),
beta1_hat_fold_5 = mean(beta1_hat_fold_5),
median_interval_width = median(interval_width)
) %>%
dplyr::ungroup() %>%
# mean and sd of the slope across the five summarised folds
dplyr::mutate(mean_b1 = purrr::pmap_dbl(.l = list(beta1_hat_fold_1,
beta1_hat_fold_2,
beta1_hat_fold_3,
beta1_hat_fold_4,
beta1_hat_fold_5),
.f = function(x, y, z, w, v) mean(c(x, y, z, w, v))),
sd_b1 = purrr::pmap_dbl(.l = list(beta1_hat_fold_1,
beta1_hat_fold_2,
beta1_hat_fold_3,
beta1_hat_fold_4,
beta1_hat_fold_5),
.f = function(x, y, z, w, v) sd(c(x, y, z, w, v)))
)

# Console output:
# `summarise()` has grouped output by 'training_set_size'. You can override using
# the `.groups` argument.
results %>%
  knitr::kable() %>%
  print()

| training_set_size | trait_num | mean_interval_width | sd_interval_width | mean_in_interval | sd_in_interval | beta1_hat_fold_1 | beta1_hat_fold_2 | beta1_hat_fold_3 | beta1_hat_fold_4 | beta1_hat_fold_5 | median_interval_width | mean_b1 | sd_b1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 1000 | 1 | 3.269663 | 0.0020488 | 0.90 | 0.3015113 | 0.9065156 | 0.9121913 | 0.9060416 | 0.9048478 | 0.9188715 | 3.268853 | 0.9096936 | 0.0058607 |
| 1000 | 2 | 3.344751 | 0.0023533 | 0.91 | 0.2876235 | 1.0445959 | 1.0464540 | 1.0443695 | 1.0332784 | 1.0392876 | 3.343471 | 1.0415971 | 0.0053573 |
| 1000 | 3 | 3.274158 | 0.0010671 | 0.94 | 0.2386833 | 0.9813570 | 0.9765217 | 0.9883275 | 0.9702371 | 0.9831070 | 3.274411 | 0.9799101 | 0.0068596 |
| 1000 | 4 | 3.230431 | 0.0016232 | 0.86 | 0.3487351 | 0.9111773 | 0.9119977 | 0.9151600 | 0.9135225 | 0.9184691 | 3.229089 | 0.9140653 | 0.0028941 |
| 1000 | 5 | 3.286248 | 0.0010091 | 0.92 | 0.2726599 | 0.9771232 | 0.9775981 | 0.9772230 | 0.9771884 | 0.9765326 | 3.287091 | 0.9771331 | 0.0003834 |
| 1000 | 6 | 3.320046 | 0.0015877 | 0.91 | 0.2876235 | 0.9946676 | 0.9978031 | 1.0119408 | 0.9960080 | 1.0008850 | 3.318530 | 1.0002609 | 0.0069322 |
| 1000 | 7 | 3.296412 | 0.0000000 | 0.89 | 0.3144660 | 0.9582067 | 0.9469858 | 0.9592551 | 0.9581995 | 0.9497622 | 3.296412 | 0.9544819 | 0.0056777 |
| 1000 | 8 | 3.317076 | 0.0016047 | 0.91 | 0.2876235 | 1.0228708 | 1.0077899 | 0.9995220 | 0.9989276 | 1.0127770 | 3.318388 | 1.0083775 | 0.0099684 |
| 1000 | 9 | 3.382647 | 0.0035541 | 0.90 | 0.3015113 | 0.9246682 | 0.9303803 | 0.9239508 | 0.9242849 | 0.9226916 | 3.384131 | 0.9251952 | 0.0029920 |
| 1000 | 10 | 3.290401 | 0.0007914 | 0.85 | 0.3588703 | 0.9839049 | 0.9978354 | 0.9718226 | 0.9894680 | 0.9905806 | 3.290028 | 0.9867223 | 0.0096936 |
| 1000 | 11 | 3.406227 | 0.0020223 | 0.92 | 0.2726599 | 1.0551749 | 1.0710386 | 1.0711884 | 1.0673222 | 1.0589285 | 3.406510 | 1.0647305 | 0.0073005 |
| 1000 | 12 | 3.279119 | 0.0032105 | 0.93 | 0.2564324 | 1.1133192 | 1.1120771 | 1.1019529 | 1.1167999 | 1.0992799 | 3.278216 | 1.1086858 | 0.0076258 |
| 1000 | 13 | 3.279855 | 0.0004555 | 0.84 | 0.3684529 | 1.0272876 | 1.0214734 | 1.0331090 | 1.0402648 | 1.0277830 | 3.280171 | 1.0299836 | 0.0070712 |
| 1000 | 14 | 3.221249 | 0.0007762 | 0.85 | 0.3588703 | 0.9793727 | 0.9827520 | 0.9806434 | 0.9683182 | 0.9839642 | 3.221994 | 0.9790101 | 0.0062382 |
| 1000 | 15 | 3.304753 | 0.0024335 | 0.91 | 0.2876235 | 1.0406638 | 1.0280019 | 1.0373577 | 1.0310384 | 1.0272879 | 3.303581 | 1.0328699 | 0.0058970 |
| 1000 | 16 | 3.351328 | 0.0016736 | 0.90 | 0.3015113 | 1.0630697 | 1.0605694 | 1.0561969 | 1.0552951 | 1.0522724 | 3.351343 | 1.0574807 | 0.0043107 |
| 1000 | 17 | 3.339365 | 0.0075022 | 0.92 | 0.2726599 | 1.0488649 | 1.0595641 | 1.0578711 | 1.0498131 | 1.0581815 | 3.344983 | 1.0548589 | 0.0050902 |
| 1000 | 18 | 3.388968 | 0.0055995 | 0.89 | 0.3144660 | 0.9920304 | 0.9821338 | 0.9725179 | 0.9795348 | 0.9800220 | 3.389309 | 0.9812478 | 0.0070302 |
| 1000 | 19 | 3.234607 | 0.0019073 | 0.91 | 0.2876235 | 1.0390109 | 1.0389052 | 1.0323969 | 1.0266685 | 1.0271897 | 3.236107 | 1.0328343 | 0.0060223 |
| 1000 | 20 | 3.281298 | 0.0006824 | 0.89 | 0.3144660 | 0.9269577 | 0.9225596 | 0.9040631 | 0.9020235 | 0.9139623 | 3.280792 | 0.9139132 | 0.0109922 |
| 1000 | 21 | 3.249487 | 0.0003837 | 0.91 | 0.2876235 | 0.9732818 | 0.9754162 | 0.9797883 | 0.9740106 | 0.9758393 | 3.249844 | 0.9756672 | 0.0025254 |
| 1000 | 22 | 3.243039 | 0.0012789 | 0.84 | 0.3684529 | 1.0176006 | 1.0150738 | 1.0252889 | 1.0219341 | 1.0264490 | 3.244161 | 1.0212693 | 0.0048804 |
| 1000 | 23 | 3.338817 | 0.0036488 | 0.93 | 0.2564324 | 0.9772634 | 0.9640830 | 0.9729947 | 0.9631500 | 0.9686649 | 3.341840 | 0.9692312 | 0.0059684 |
| 1000 | 24 | 3.425091 | 0.0037725 | 0.91 | 0.2876235 | 1.0430569 | 1.0346992 | 1.0301639 | 1.0403048 | 1.0304766 | 3.425764 | 1.0357403 | 0.0057934 |
| 1000 | 25 | 3.282211 | 0.0063019 | 0.94 | 0.2386833 | 1.0470885 | 1.0346177 | 1.0349041 | 1.0429263 | 1.0443080 | 3.276165 | 1.0407689 | 0.0056866 |
| 1000 | 26 | 3.213914 | 0.0017614 | 0.92 | 0.2726599 | 0.9783352 | 0.9903133 | 0.9872459 | 0.9844983 | 0.9827820 | 3.213518 | 0.9846349 | 0.0045332 |
| 1000 | 27 | 3.361788 | 0.0028798 | 0.86 | 0.3487351 | 0.9303066 | 0.8989762 | 0.9201872 | 0.9251130 | 0.9236144 | 3.360140 | 0.9196395 | 0.0121121 |
| 1000 | 28 | 3.379681 | 0.0023469 | 0.90 | 0.3015113 | 0.9623537 | 0.9656788 | 0.9583561 | 0.9563788 | 0.9583019 | 3.380762 | 0.9602139 | 0.0037503 |
| 1000 | 29 | 3.377388 | 0.0022226 | 0.95 | 0.2190429 | 1.0268036 | 1.0402019 | 1.0375707 | 1.0408578 | 1.0284271 | 3.376673 | 1.0347722 | 0.0066728 |
| 1000 | 30 | 3.342649 | 0.0035026 | 0.91 | 0.2876235 | 0.9871109 | 0.9863959 | 0.9989223 | 0.9974864 | 0.9843121 | 3.339364 | 0.9908455 | 0.0068148 |
| 1000 | 31 | 3.293695 | 0.0004773 | 0.83 | 0.3775252 | 0.9277105 | 0.9324002 | 0.9427483 | 0.9241295 | 0.9257188 | 3.293649 | 0.9305415 | 0.0074974 |
| 1000 | 32 | 3.333125 | 0.0047400 | 0.90 | 0.3015113 | 0.9819737 | 0.9903822 | 0.9906274 | 1.0054269 | 0.9960257 | 3.335702 | 0.9928872 | 0.0086253 |
| 1000 | 33 | 3.324350 | 0.0021101 | 0.93 | 0.2564324 | 1.0466477 | 1.0511812 | 1.0496322 | 1.0506528 | 1.0393048 | 3.322694 | 1.0474837 | 0.0048971 |
| 1000 | 34 | 3.302055 | 0.0021719 | 0.91 | 0.2876235 | 0.9796539 | 0.9925029 | 0.9815801 | 0.9880037 | 0.9858494 | 3.302621 | 0.9855180 | 0.0051228 |
| 1000 | 35 | 3.312941 | 0.0043681 | 0.93 | 0.2564324 | 1.0056364 | 1.0019997 | 1.0080133 | 1.0110617 | 0.9930155 | 3.313399 | 1.0039453 | 0.0069516 |
| 1000 | 36 | 3.317381 | 0.0024905 | 0.91 | 0.2876235 | 0.9635208 | 0.9649013 | 0.9629495 | 0.9610914 | 0.9588560 | 3.315525 | 0.9622638 | 0.0023449 |
| 1000 | 37 | 3.321484 | 0.0028642 | 0.91 | 0.2876235 | 1.0256105 | 1.0296355 | 1.0360205 | 1.0385688 | 1.0457209 | 3.323085 | 1.0351112 | 0.0078357 |
| 1000 | 38 | 3.291529 | 0.0023839 | 0.87 | 0.3379977 | 1.0279565 | 1.0339062 | 1.0369824 | 1.0349016 | 1.0320923 | 3.292975 | 1.0331678 | 0.0034065 |
| 1000 | 39 | 3.283259 | 0.0014321 | 0.93 | 0.2564324 | 1.0222746 | 1.0293725 | 1.0190563 | 1.0315141 | 1.0233214 | 3.283279 | 1.0251078 | 0.0051737 |
| 1000 | 40 | 3.254464 | 0.0013634 | 0.88 | 0.3265986 | 1.0535078 | 1.0519398 | 1.0618976 | 1.0659103 | 1.0481263 | 3.254882 | 1.0562764 | 0.0073706 |
| 1000 | 41 | 3.310142 | 0.0020953 | 0.93 | 0.2564324 | 1.0414822 | 1.0385111 | 1.0437353 | 1.0400287 | 1.0442206 | 3.311724 | 1.0415956 | 0.0024213 |
| 1000 | 42 | 3.296851 | 0.0026634 | 0.87 | 0.3379977 | 0.9895380 | 0.9838824 | 0.9892810 | 0.9767553 | 0.9979014 | 3.299058 | 0.9874716 | 0.0078110 |
| 1000 | 43 | 3.285878 | 0.0006534 | 0.87 | 0.3379977 | 0.9526910 | 0.9505892 | 0.9448929 | 0.9606618 | 0.9462730 | 3.286320 | 0.9510216 | 0.0062453 |
| 1000 | 44 | 3.395559 | 0.0037983 | 0.93 | 0.2564324 | 0.9509533 | 0.9426644 | 0.9480655 | 0.9336279 | 0.9281187 | 3.397338 | 0.9406859 | 0.0096377 |
| 1000 | 45 | 3.384050 | 0.0009418 | 0.90 | 0.3015113 | 1.0281364 | 1.0346181 | 1.0401679 | 1.0469562 | 1.0417212 | 3.384337 | 1.0383199 | 0.0071934 |
| 1000 | 46 | 3.361692 | 0.0012464 | 0.84 | 0.3684529 | 1.0500323 | 1.0631663 | 1.0451493 | 1.0532041 | 1.0730478 | 3.362904 | 1.0569199 | 0.0111683 |
| 1000 | 47 | 3.385481 | 0.0039291 | 0.87 | 0.3379977 | 0.9316182 | 0.9322810 | 0.9346191 | 0.9302921 | 0.9173239 | 3.386566 | 0.9292268 | 0.0068363 |
| 1000 | 48 | 3.324511 | 0.0021389 | 0.88 | 0.3265986 | 0.9783534 | 0.9858048 | 0.9770990 | 0.9870862 | 0.9822730 | 3.326377 | 0.9821233 | 0.0044063 |
| 1000 | 49 | 3.263428 | 0.0031921 | 0.88 | 0.3265986 | 1.0090399 | 1.0239101 | 1.0106192 | 1.0135401 | 1.0133401 | 3.265303 | 1.0140899 | 0.0058062 |
| 1000 | 50 | 3.383038 | 0.0030075 | 0.92 | 0.2726599 | 0.9909738 | 1.0001257 | 0.9973402 | 0.9833335 | 0.9952424 | 3.384251 | 0.9934031 | 0.0065455 |
| 10000 | 1 | 3.270154 | 0.0004563 | 0.89 | 0.3144660 | 0.9702581 | 0.9696415 | 0.9725096 | 0.9666922 | 0.9664444 | 3.269968 | 0.9691092 | 0.0025549 |
| 10000 | 2 | 3.307498 | 0.0006781 | 0.90 | 0.3015113 | 1.0068604 | 1.0082770 | 1.0059084 | 0.9994799 | 1.0064373 | 3.307266 | 1.0053926 | 0.0034202 |
| 10000 | 3 | 3.305666 | 0.0004471 | 0.94 | 0.2386833 | 0.9952225 | 0.9907088 | 0.9930630 | 0.9954499 | 0.9957652 | 3.305828 | 0.9940419 | 0.0021456 |
| 10000 | 4 | 3.297982 | 0.0004889 | 0.88 | 0.3265986 | 0.9905126 | 0.9920318 | 0.9917018 | 0.9938894 | 0.9901000 | 3.298420 | 0.9916471 | 0.0014882 |
| 10000 | 5 | 3.311855 | 0.0002613 | 0.92 | 0.2726599 | 0.9896307 | 0.9899452 | 0.9920244 | 0.9902287 | 0.9901152 | 3.312098 | 0.9903888 | 0.0009417 |
| 10000 | 6 | 3.299189 | 0.0001528 | 0.91 | 0.2876235 | 0.9866049 | 0.9861679 | 0.9884676 | 0.9901826 | 0.9866258 | 3.299099 | 0.9876098 | 0.0016891 |
| 10000 | 7 | 3.294630 | 0.0009200 | 0.89 | 0.3144660 | 1.0159411 | 1.0133711 | 1.0149591 | 1.0154214 | 1.0162966 | 3.294148 | 1.0151979 | 0.0011406 |
| 10000 | 8 | 3.290669 | 0.0002296 | 0.90 | 0.3015113 | 1.0051638 | 1.0038946 | 1.0043200 | 1.0072521 | 1.0082875 | 3.290570 | 1.0057836 | 0.0019053 |
| 10000 | 9 | 3.288675 | 0.0009101 | 0.87 | 0.3379977 | 0.9800655 | 0.9792149 | 0.9779191 | 0.9764331 | 0.9820672 | 3.289365 | 0.9791400 | 0.0021359 |
| 10000 | 10 | 3.294845 | 0.0006079 | 0.85 | 0.3588703 | 0.9928709 | 0.9917809 | 0.9889710 | 0.9926706 | 0.9953541 | 3.295227 | 0.9923295 | 0.0022988 |
| 10000 | 11 | 3.344140 | 0.0004833 | 0.92 | 0.2726599 | 0.9951009 | 0.9952136 | 0.9929867 | 0.9919202 | 0.9957589 | 3.344597 | 0.9941961 | 0.0016537 |
| 10000 | 12 | 3.268679 | 0.0002491 | 0.93 | 0.2564324 | 1.0210084 | 1.0186432 | 1.0193218 | 1.0222108 | 1.0224976 | 3.268835 | 1.0207364 | 0.0017126 |
| 10000 | 13 | 3.301849 | 0.0001953 | 0.85 | 0.3588703 | 1.0024792 | 1.0048416 | 1.0054082 | 1.0095662 | 1.0053171 | 3.302035 | 1.0055224 | 0.0025560 |
| 10000 | 14 | 3.277762 | 0.0003858 | 0.86 | 0.3487351 | 1.0145859 | 1.0152086 | 1.0133260 | 1.0109958 | 1.0160803 | 3.278137 | 1.0140393 | 0.0019751 |
| 10000 | 15 | 3.250141 | 0.0005377 | 0.90 | 0.3015113 | 0.9795953 | 0.9815929 | 0.9785730 | 0.9821062 | 0.9805872 | 3.250432 | 0.9804909 | 0.0014417 |
| 10000 | 16 | 3.249044 | 0.0004771 | 0.89 | 0.3144660 | 0.9985478 | 0.9973796 | 0.9991926 | 0.9985771 | 0.9990079 | 3.249487 | 0.9985410 | 0.0007058 |
| 10000 | 17 | 3.291970 | 0.0004443 | 0.91 | 0.2876235 | 1.0105684 | 1.0107225 | 1.0080642 | 1.0088449 | 1.0113155 | 3.292373 | 1.0099031 | 0.0013794 |
| 10000 | 18 | 3.314226 | 0.0003154 | 0.88 | 0.3265986 | 0.9867850 | 0.9857631 | 0.9865365 | 0.9867038 | 0.9881312 | 3.313972 | 0.9867839 | 0.0008551 |
| 10000 | 19 | 3.278342 | 0.0007176 | 0.91 | 0.2876235 | 1.0007617 | 1.0007086 | 1.0018955 | 1.0020897 | 1.0013254 | 3.277835 | 1.0013562 | 0.0006330 |
| 10000 | 20 | 3.296240 | 0.0002258 | 0.89 | 0.3144660 | 0.9693706 | 0.9709751 | 0.9676739 | 0.9677549 | 0.9748950 | 3.296422 | 0.9701339 | 0.0029862 |
| 10000 | 21 | 3.274367 | 0.0001712 | 0.92 | 0.2726599 | 1.0084111 | 1.0099171 | 1.0057049 | 1.0077447 | 1.0086067 | 3.274410 | 1.0080769 | 0.0015424 |
| 10000 | 22 | 3.268773 | 0.0007709 | 0.85 | 0.3588703 | 0.9778421 | 0.9767741 | 0.9783067 | 0.9803107 | 0.9781879 | 3.268425 | 0.9782843 | 0.0012838 |
| 10000 | 23 | 3.312616 | 0.0007980 | 0.92 | 0.2726599 | 1.0180404 | 1.0196089 | 1.0170376 | 1.0190702 | 1.0164472 | 3.313095 | 1.0180408 | 0.0013289 |
| 10000 | 24 | 3.257415 | 0.0002974 | 0.90 | 0.3015113 | 0.9955714 | 0.9952546 | 0.9984949 | 0.9976871 | 0.9959567 | 3.257691 | 0.9965929 | 0.0014190 |
| 10000 | 25 | 3.295426 | 0.0002744 | 0.94 | 0.2386833 | 1.0181587 | 1.0156793 | 1.0150684 | 1.0111794 | 1.0142163 | 3.295356 | 1.0148604 | 0.0025277 |
| 10000 | 26 | 3.299758 | 0.0005659 | 0.93 | 0.2564324 | 1.0004877 | 0.9988031 | 1.0017621 | 0.9971864 | 1.0004475 | 3.300080 | 0.9997374 | 0.0017711 |
| 10000 | 27 | 3.319314 | 0.0018348 | 0.85 | 0.3588703 | 0.9938729 | 0.9890072 | 0.9927564 | 0.9911029 | 0.9888362 | 3.318052 | 0.9911151 | 0.0022325 |
| 10000 | 28 | 3.246731 | 0.0005051 | 0.90 | 0.3015113 | 0.9897918 | 0.9934104 | 0.9913962 | 0.9922393 | 0.9920222 | 3.246702 | 0.9917720 | 0.0013255 |
| 10000 | 29 | 3.307375 | 0.0002463 | 0.96 | 0.1969464 | 0.9833423 | 0.9846720 | 0.9834209 | 0.9847303 | 0.9846258 | 3.307402 | 0.9841583 | 0.0007105 |
| 10000 | 30 | 3.261481 | 0.0003443 | 0.90 | 0.3015113 | 0.9897589 | 0.9928072 | 0.9883256 | 0.9902908 | 0.9905425 | 3.261813 | 0.9903450 | 0.0016222 |
| 10000 | 31 | 3.290697 | 0.0002159 | 0.82 | 0.3861229 | 0.9922776 | 0.9958630 | 0.9971857 | 0.9948750 | 0.9956903 | 3.290675 | 0.9951783 | 0.0018212 |
| 10000 | 32 | 3.244219 | 0.0003519 | 0.90 | 0.3015113 | 0.9980821 | 1.0018115 | 0.9987914 | 0.9999459 | 1.0005863 | 3.244546 | 0.9998435 | 0.0014701 |
| 10000 | 33 | 3.305079 | 0.0002525 | 0.92 | 0.2726599 | 0.9957653 | 0.9933296 | 0.9917444 | 0.9944223 | 0.9959041 | 3.305276 | 0.9942331 | 0.0017454 |
| 10000 | 34 | 3.304851 | 0.0005965 | 0.91 | 0.2876235 | 0.9895069 | 0.9962579 | 0.9943861 | 0.9943288 | 0.9932770 | 3.305276 | 0.9935513 | 0.0025030 |
| 10000 | 35 | 3.281931 | 0.0005446 | 0.93 | 0.2564324 | 0.9684046 | 0.9681545 | 0.9675809 | 0.9725219 | 0.9681103 | 3.281415 | 0.9689544 | 0.0020167 |
| 10000 | 36 | 3.291591 | 0.0008047 | 0.92 | 0.2726599 | 1.0045561 | 1.0022102 | 1.0023347 | 1.0041388 | 1.0032860 | 3.290827 | 1.0033052 | 0.0010489 |
| 10000 | 37 | 3.286436 | 0.0003196 | 0.91 | 0.2876235 | 1.0096608 | 1.0070383 | 1.0104226 | 1.0109595 | 1.0099677 | 3.286549 | 1.0096098 | 0.0015187 |
| 10000 | 38 | 3.292834 | 0.0001629 | 0.87 | 0.3379977 | 0.9992385 | 1.0001884 | 0.9967046 | 1.0015196 | 0.9996198 | 3.292883 | 0.9994542 | 0.0017635 |
| 10000 | 39 | 3.275772 | 0.0005378 | 0.93 | 0.2564324 | 1.0072665 | 1.0123215 | 1.0116986 | 1.0097169 | 1.0096925 | 3.276295 | 1.0101392 | 0.0019890 |
| 10000 | 40 | 3.267455 | 0.0001610 | 0.89 | 0.3144660 | 1.0026875 | 0.9972174 | 1.0022171 | 1.0019432 | 1.0002842 | 3.267612 | 1.0008699 | 0.0022335 |
| 10000 | 41 | 3.275336 | 0.0006660 | 0.92 | 0.2726599 | 0.9793571 | 0.9795859 | 0.9790515 | 0.9740876 | 0.9764112 | 3.274790 | 0.9776987 | 0.0023896 |
| 10000 | 42 | 3.307056 | 0.0003436 | 0.86 | 0.3487351 | 1.0095064 | 1.0082879 | 1.0087154 | 1.0058256 | 1.0090916 | 3.307382 | 1.0082854 | 0.0014471 |
| 10000 | 43 | 3.285535 | 0.0003932 | 0.86 | 0.3487351 | 1.0164067 | 1.0162693 | 1.0148107 | 1.0183307 | 1.0158239 | 3.285173 | 1.0163283 | 0.0012822 |
| 10000 | 44 | 3.307926 | 0.0003645 | 0.92 | 0.2726599 | 1.0042092 | 0.9999930 | 0.9992875 | 1.0017402 | 0.9998453 | 3.307590 | 1.0010150 | 0.0020079 |
| 10000 | 45 | 3.334554 | 0.0007744 | 0.90 | 0.3015113 | 0.9996387 | 0.9985265 | 1.0006972 | 0.9968591 | 0.9978923 | 3.334912 | 0.9987228 | 0.0014949 |
| 10000 | 46 | 3.317126 | 0.0004997 | 0.84 | 0.3684529 | 1.0054035 | 1.0014818 | 0.9999203 | 1.0028222 | 1.0091165 | 3.316666 | 1.0037488 | 0.0036127 |
| 10000 | 47 | 3.267832 | 0.0002567 | 0.86 | 0.3487351 | 0.9959679 | 0.9906880 | 0.9923052 | 0.9910029 | 0.9933180 | 3.267895 | 0.9926564 | 0.0021293 |
| 10000 | 48 | 3.298945 | 0.0002295 | 0.87 | 0.3379977 | 0.9994844 | 1.0003358 | 1.0000943 | 0.9975878 | 0.9985393 | 3.298980 | 0.9992083 | 0.0011408 |
| 10000 | 49 | 3.270811 | 0.0006287 | 0.88 | 0.3265986 | 0.9828106 | 0.9834019 | 0.9791488 | 0.9849327 | 0.9813058 | 3.271317 | 0.9823200 | 0.0021978 |
| 10000 | 50 | 3.295095 | 0.0003907 | 0.92 | 0.2726599 | 0.9744410 | 0.9798955 | 0.9773824 | 0.9741856 | 0.9790768 | 3.295288 | 0.9769962 | 0.0026131 |
library(ggplot2)
# Boxplot of interval widths per trait, coloured by training-set size.
tt_intermediate %>%
ggplot(aes(y = interval_width, colour = as.factor(training_set_size), x = as.factor(trait_num))) +
geom_boxplot()

#ggsave(here::here("figures", "interval_width_boxplot.png"), width = 10, height = 10)

p1 <- results %>%
ggplot(aes(x = mean_b1, y = median_interval_width, colour = as.factor(training_set_size), replicate_num = trait_num)) +
geom_point()
plotly::ggplotly(p1, tooltip = c("x", "y", "colour", "replicate_num"))

sessioninfo::session_info()

─ Session info ───────────────────────────────────────────────────────────────
setting value
version R version 4.3.0 (2023-04-21)
os Ubuntu 18.04.6 LTS
system x86_64, linux-gnu
ui X11
language en_US:
collate en_US.UTF-8
ctype en_US.UTF-8
tz America/Detroit
date 2023-05-07
pandoc 1.19.2.4 @ /usr/bin/ (via rmarkdown)
─ Packages ───────────────────────────────────────────────────────────────────
package * version date (UTC) lib source
cli 3.6.1 2023-03-23 [1] CRAN (R 4.2.3)
colorspace 2.1-0 2023-01-23 [1] CRAN (R 4.2.2)
crosstalk 1.2.0 2021-11-04 [1] CRAN (R 4.1.2)
data.table 1.14.8 2023-02-17 [1] CRAN (R 4.2.2)
digest 0.6.31 2022-12-11 [1] CRAN (R 4.2.2)
dplyr 1.1.1 2023-03-22 [1] CRAN (R 4.2.3)
ellipsis 0.3.2 2021-04-29 [2] CRAN (R 4.2.1)
evaluate 0.20 2023-01-17 [1] CRAN (R 4.2.2)
fansi 1.0.4 2023-01-22 [1] CRAN (R 4.2.2)
farver 2.1.1 2022-07-06 [1] CRAN (R 4.2.3)
fastmap 1.1.1 2023-02-24 [1] CRAN (R 4.2.3)
generics 0.1.3 2022-07-05 [1] CRAN (R 4.2.3)
ggplot2 * 3.4.2 2023-04-03 [1] CRAN (R 4.2.3)
glue 1.6.2 2022-02-24 [1] CRAN (R 4.2.0)
gtable 0.3.3 2023-03-21 [1] CRAN (R 4.2.3)
here 1.0.1 2020-12-13 [2] CRAN (R 4.1.1)
htmltools 0.5.5 2023-03-23 [1] CRAN (R 4.2.3)
htmlwidgets 1.6.2 2023-03-17 [1] CRAN (R 4.2.2)
httr 1.4.5 2023-02-24 [1] CRAN (R 4.2.3)
jsonlite 1.8.4 2022-12-06 [1] CRAN (R 4.2.3)
knitr 1.42 2023-01-25 [1] CRAN (R 4.2.3)
labeling 0.4.2 2020-10-20 [2] CRAN (R 4.0.3)
lattice 0.21-8 2023-04-05 [1] CRAN (R 4.2.3)
lazyeval 0.2.2 2019-03-15 [2] CRAN (R 4.0.3)
lifecycle 1.0.3 2022-10-07 [1] CRAN (R 4.2.2)
magrittr * 2.0.3 2022-03-30 [1] CRAN (R 4.2.0)
Matrix 1.5-4 2023-04-04 [1] CRAN (R 4.2.3)
munsell 0.5.0 2018-06-12 [2] CRAN (R 4.0.3)
pillar 1.9.0 2023-03-22 [1] CRAN (R 4.2.3)
pkgconfig 2.0.3 2019-09-22 [2] CRAN (R 4.0.3)
plotly 4.10.1 2022-11-07 [1] CRAN (R 4.2.2)
png 0.1-8 2022-11-29 [1] CRAN (R 4.2.3)
purrr 1.0.1 2023-01-10 [1] CRAN (R 4.2.2)
R6 2.5.1 2021-08-19 [2] CRAN (R 4.1.1)
rappdirs 0.3.3 2021-01-31 [2] CRAN (R 4.0.3)
Rcpp 1.0.10 2023-01-22 [1] CRAN (R 4.2.2)
reticulate 1.28 2023-01-27 [1] CRAN (R 4.2.3)
rlang 1.1.0 2023-03-14 [1] CRAN (R 4.2.2)
rmarkdown 2.21 2023-03-26 [1] CRAN (R 4.2.3)
rprojroot 2.0.3 2022-04-02 [2] CRAN (R 4.2.0)
scales 1.2.1 2022-08-20 [1] CRAN (R 4.2.3)
sessioninfo 1.2.2 2021-12-06 [1] CRAN (R 4.1.2)
stringi 1.7.12 2023-01-11 [1] CRAN (R 4.2.2)
stringr 1.5.0 2022-12-02 [1] CRAN (R 4.2.3)
tibble 3.2.1 2023-03-20 [1] CRAN (R 4.2.3)
tictoc 1.1 2022-09-03 [1] CRAN (R 4.2.1)
tidyr 1.3.0 2023-01-24 [1] CRAN (R 4.2.3)
tidyselect 1.2.0 2022-10-10 [1] CRAN (R 4.2.2)
utf8 1.2.3 2023-01-31 [1] CRAN (R 4.2.3)
vctrs 0.6.1 2023-03-22 [1] CRAN (R 4.2.3)
viridisLite 0.4.1 2022-08-22 [1] CRAN (R 4.2.3)
withr 2.5.0 2022-03-03 [1] CRAN (R 4.2.0)
xfun 0.38 2023-03-24 [1] CRAN (R 4.2.3)
yaml 2.3.7 2023-01-23 [1] CRAN (R 4.2.3)
[1] /net/mulan/home/fredboe/R/x86_64-pc-linux-gnu-library/4.0
[2] /net/mario/cluster/lib/R/site-library-bionic-40
[3] /usr/local/lib/R/site-library
[4] /usr/lib/R/site-library
[5] /usr/lib/R/library
─ Python configuration ───────────────────────────────────────────────────────
python: /net/mulan/home/fredboe/miniconda3/envs/barber1/bin/python3
libpython: /net/mulan/home/fredboe/miniconda3/envs/barber1/lib/libpython3.10.so
pythonhome: /net/mulan/home/fredboe/miniconda3/envs/barber1:/net/mulan/home/fredboe/miniconda3/envs/barber1
version: 3.10.9 (main, Mar 8 2023, 10:47:38) [GCC 11.2.0]
numpy: /net/mulan/home/fredboe/miniconda3/envs/barber1/lib/python3.10/site-packages/numpy
numpy_version: 1.23.5
python versions found:
/net/mulan/home/fredboe/miniconda3/envs/barber1/bin/python3
/net/mulan/home/fredboe/miniconda3/envs/barber1/bin/python
/net/mulan/home/fredboe/miniconda3/bin/python
/net/mulan/home/fredboe/miniconda3/envs/ldsc/bin/python
──────────────────────────────────────────────────────────────────────────────
# git commit info
# Record the repository's commit history for reproducibility; the most
# recent commit is printed on the next line.
gr <- git2r::repository(here::here()) %>%
git2r::commits()
gr[[1]]

# Console output:
# [af93d90] 2023-05-07: fix: fixed typo